All data used by this script was gathered with the Harmony Rosetta-CLI fork's parse:supply command.
The script was written quickly and is NOT optimal for parsing the data, nor is it memory friendly. If you wish to run this script yourself, you have been warned.
import numpy as np
import json
import os
import plotly.graph_objects as go
from collections import defaultdict
import warnings; warnings.simplefilter('ignore')
from IPython.display import clear_output
# --- Script configuration ---
SHARD = 2  # Shard whose parsed chain data is loaded and graphed below.
STAKING_EPOCH = 186  # Epoch at which staking-era reward math starts (per this script).
GRAPH_DIR = "./graphs"  # Exported HTML graphs are written here.
if not os.path.exists(GRAPH_DIR):
    os.makedirs(GRAPH_DIR)
# Root of the Rosetta-CLI parse:supply output; see the directory tree in this document.
DATA_DIR = "/Users/danielvdm/go/src/github.com/Daniel-VDM/jupyter/hmy_chain_parse"
DATA_FILE_PREFIX = "parse_output"  # Per-block JSON-lines files.
ACC_FILE_PREFIX = "parse_last_seen_account"  # One bech32 address per line.
def load_data(shard=0):
    """Load all parsed block and account data for the given shard into memory.

    Reads every ``parse_output*`` file (one JSON block object per line) and
    every ``parse_last_seen_account*`` file (one bech32 address per line)
    from ``{DATA_DIR}/s{shard}/parse``.

    Returns a dict with:
      * "blocks": np.array of block dicts, sorted by block index.
      * "unique_accounts": set of unique "one1..." account addresses.
    """
    data_dir = f"{DATA_DIR}/s{shard}/parse"
    data_files = [f for f in os.listdir(data_dir) if f.startswith(DATA_FILE_PREFIX)]
    # File names end with ">N"; sort numerically so blocks load roughly in order.
    data_files.sort(key=lambda e: int(e.split(">")[-1]))
    acc_files = [f for f in os.listdir(data_dir) if f.startswith(ACC_FILE_PREFIX)]
    assert len(acc_files) > 0
    blocks, unique_accs = [], set()
    for path in data_files:
        blk_path = f"{data_dir}/{path}"
        with open(blk_path, 'r') as f:
            clear_output(wait=True)
            print(f"loading block data from: {blk_path}")
            for blk in f:
                blocks.append(json.loads(blk))
    clear_output(wait=True)
    print(f"sorting loaded block data...")
    blocks.sort(key=lambda e: e["block_identifier"]["index"])
    for path in acc_files:
        acc_path = f"{data_dir}/{path}"
        with open(acc_path, 'r') as f:
            clear_output(wait=True)
            print(f"loading account data from: {acc_path}")
            for acc in f:
                # BUG FIX: strip the trailing newline before adding; otherwise an
                # address appearing as a file's final (newline-less) line and again
                # with a newline elsewhere would be counted as two distinct entries.
                acc = acc.strip()
                if acc.startswith("one1"):
                    unique_accs.add(acc)
    clear_output(wait=True)
    print(f"Loaded data for shard {shard} successfully!")
    return {
        "blocks": np.array(blocks),
        "unique_accounts": unique_accs,
    }
data = load_data(SHARD)  # NOTE: loads the entire shard's parse output into memory.
Loaded data for shard 2 successfully!
Your DATA_DIR should be organized like the following directory tree to run the code as is.
.
├── s0
│ └── parse
│ ├── parse_db_s0
│ │ └── parse-supply
│ │ └── 942e235983792e8b72b23cc0dd2d49940e1528c81e9058f4e8412429a9860dfb
│ │ ├── 000813.vlog
│ │ ├── 000838.sst
│ │ ├── KEYREGISTRY
│ │ └── MANIFEST
│ ├── parse_last_seen_accounts_<2021-01-18\ 01:57:08.172922727\ +0000\ UTC\ m=+0.046201073>
│ ├── parse_output_<2021-01-18\ 01:57:08.172902904\ +0000\ UTC\ m=+0.046181262>0
│ ├── parse_output_<2021-01-18\ 01:57:08.172902904\ +0000\ UTC\ m=+0.046181262>1
│ ├── parse_output_<2021-01-18\ 01:57:08.172902904\ +0000\ UTC\ m=+0.046181262>10
│ └── parse_output_<2021-01-18\ 01:57:08.172902904\ +0000\ UTC\ m=+0.046181262>99
├── s1
│ └── parse
│ ├── parse_db_s1
│ │ └── parse-supply
│ │ └── 6b948d461136a919989fbc8252c99bab80b07aed31a3ae39fb25ac9261eb1cca
│ │ ├── 000791.vlog
│ │ ├── 000814.sst
│ │ ├── KEYREGISTRY
│ │ └── MANIFEST
│ ├── parse_last_seen_accounts_<2021-01-18\ 01:57:17.570517136\ +0000\ UTC\ m=+0.049003351>
│ ├── parse_output_<2021-01-18\ 01:57:17.570491926\ +0000\ UTC\ m=+0.048978151>0
│ ├── parse_output_<2021-01-18\ 01:57:17.570491926\ +0000\ UTC\ m=+0.048978151>1
│ └── parse_output_<2021-01-18\ 01:57:17.570491926\ +0000\ UTC\ m=+0.048978151>99
├── s2
│ └── parse
│ ├── parse_db_s2
│ │ └── parse-supply
│ │ └── 8e94105e15f12b879263055c007bdb1040f4f8c17a626d3727b5a0633d016e58
│ │ ├── 000803.vlog
│ │ ├── 000828.sst
│ │ ├── KEYREGISTRY
│ │ └── MANIFEST
│ ├── parse_last_seen_accounts_<2021-01-18\ 01:57:35.646789168\ +0000\ UTC\ m=+0.051387060>
│ ├── parse_output_<2021-01-18\ 01:57:35.64673466\ +0000\ UTC\ m=+0.051332553>0
│ ├── parse_output_<2021-01-18\ 01:57:35.64673466\ +0000\ UTC\ m=+0.051332553>1
│ ├── parse_output_<2021-01-18\ 01:57:35.64673466\ +0000\ UTC\ m=+0.051332553>10
│ ├── parse_output_<2021-01-18\ 01:57:35.64673466\ +0000\ UTC\ m=+0.051332553>100
│ └── parse_output_<2021-01-18\ 01:57:35.64673466\ +0000\ UTC\ m=+0.051332553>99
└── s3
└── parse
├── parse_db_s3
│ └── parse-supply
│ └── d11142a30812bbe65b8b2cda16a895ccdebbe71844c54df0cd812481e345244b
│ ├── 000797.vlog
│ ├── 000822.sst
│ ├── KEYREGISTRY
│ └── MANIFEST
├── parse_last_seen_accounts_<2021-01-18\ 01:57:45.885611423\ +0000\ UTC\ m=+0.061823480>
├── parse_output_<2021-01-18\ 01:57:45.885573374\ +0000\ UTC\ m=+0.061785449>0
├── parse_output_<2021-01-18\ 01:57:45.885573374\ +0000\ UTC\ m=+0.061785449>1
├── parse_output_<2021-01-18\ 01:57:45.885573374\ +0000\ UTC\ m=+0.061785449>10
└── parse_output_<2021-01-18\ 01:57:45.885573374\ +0000\ UTC\ m=+0.061785449>99
data["blocks"] is a list of Block JSON objects (stored as dicts) ordered by block index.
The Block object looks like the following:
{
"block_identifier": {
"index": 0,
"hash": "0xb4d158b82ac8a653c42b78697ab1cd0c6a0d9a15ab3bc34130f0b719fb174d2a"
},
"block_epoch": 0,
"approx_block_rewards_so_far": 0,
"approx_circulating_supply_so_far": 1.26e+28,
"block_rewards": 0,
"total_amount_deducted_from_accounts": 0,
"total_amount_credited_from_accounts": 1.26e+28,
"number_of_contracts_created": 0,
"number_of_transactions": 1,
"number_of_accounts": 1,
"amount_transferred": 0,
"cross_shard_amount_received": 0,
"cross_shard_amount_sent": 0,
"gas_fees": 0,
"approx_number_of_unique_accounts_so_far": 1
}
def as_one(val):
    """Convert `val` from ATTO to ONE (1 ONE == 1e18 ATTO).

    Accepts scalars or array-likes; np.multiply broadcasts over arrays.
    """
    atto_to_one = 1e-18
    return np.multiply(val, atto_to_one)
def get_blk_index(b):
    """Return the index field of the given block's identifier."""
    identifier = b["block_identifier"]
    return identifier["index"]
def get_stats_by_epoch(blocks, starting, ending):
    """Calculate stats for blocks whose epoch lies in [starting, ending)."""
    print(f"calculating stats from epoch [{starting}, {ending})")
    return get_basic_stats(
        blocks, lambda blk: starting <= blk["block_epoch"] < ending
    )
def get_stats_by_block(blocks, starting, ending):
    """Calculate stats for blocks whose index lies in [starting, ending)."""
    print(f"calculating stats from block [{starting}, {ending})")
    return get_basic_stats(
        blocks, lambda blk: starting <= blk["block_identifier"]["index"] < ending
    )
# Fixed base reward per block before the staking era (24 ONE).
pre_staking_base_block_rewards = 24000000000000000000 # In ATTO
def get_basic_stats(blocks, inclusion_fn=lambda blk: True):
    """
    Goes through all of the given blocks and calculates the basic stats for
    every block that passes the given inclusion function.

    Returns a nested dict:
      * "funds": summed fund movements, converted from ATTO to ONE.
      * "counts": block / contract-creation / transaction counts.
      * "max" / "min": per-block transaction & account extremes, each with the
        identifier of the block where the extreme was recorded, plus the
        max/min epoch and block index seen.
    """
    counters = defaultdict(int)
    max_epoch, min_epoch, max_block, min_block = -float("inf"), float("inf"), -float("inf"), float("inf")
    max_tx_per_block, max_tx_per_block_id = -float("inf"), None
    max_accs_per_block, max_accs_per_block_id = -float("inf"), None
    min_tx_per_block, min_tx_per_block_id = float("inf"), None
    min_accs_per_block, min_accs_per_block_id = float("inf"), None
    for blk in blocks:
        if not inclusion_fn(blk):
            continue
        counters["block_rewards"] += blk["block_rewards"]
        counters["block_count"] += 1
        counters["gas_fees"] += blk["gas_fees"]
        tx_count = blk["number_of_transactions"]
        if blk["block_epoch"] < STAKING_EPOCH:
            # Pre-staking: anything above the fixed base reward is treated as
            # coinbase rewards; clamp at 0 for blocks with no rewards.
            counters["coinbase_rewards"] += max(blk["block_rewards"] - pre_staking_base_block_rewards, 0)
            tx_count -= 1  # Do not count side effect tx since each block has one.
        counters["transaction_count"] += tx_count
        counters["cross_shard_incomming_funds"] += blk["cross_shard_amount_received"]
        counters["cross_shard_outgoing_funds"] += blk["cross_shard_amount_sent"]
        counters["contract_creations"] += blk["number_of_contracts_created"]
        if tx_count > max_tx_per_block:
            max_tx_per_block = tx_count
            max_tx_per_block_id = blk["block_identifier"]
        if tx_count < min_tx_per_block:
            min_tx_per_block = tx_count
            min_tx_per_block_id = blk["block_identifier"]
        if blk["number_of_accounts"] > max_accs_per_block:
            max_accs_per_block = blk["number_of_accounts"]
            max_accs_per_block_id = blk["block_identifier"]
        if blk["number_of_accounts"] < min_accs_per_block:
            min_accs_per_block = blk["number_of_accounts"]
            min_accs_per_block_id = blk["block_identifier"]
        counters["total_deducted_amount"] += blk["total_amount_deducted_from_accounts"]
        counters["total_credited_amount"] += blk["total_amount_credited_from_accounts"]
        counters["total_supply"] += (blk["total_amount_credited_from_accounts"] - blk["total_amount_deducted_from_accounts"])
        max_epoch = max(max_epoch, blk["block_epoch"])
        max_block = max(max_block, blk["block_identifier"]["index"])
        min_epoch = min(min_epoch, blk["block_epoch"])
        min_block = min(min_block, blk["block_identifier"]["index"])
    return {
        "funds": {
            "block_rewards": as_one(counters["block_rewards"]),
            "gas": as_one(counters["gas_fees"]),
            "coinbase_rewards": as_one(counters["coinbase_rewards"]),
            "cross_shard_received": as_one(counters["cross_shard_incomming_funds"]),
            "cross_shard_sent": as_one(counters["cross_shard_outgoing_funds"]),
            "supply": {
                "circulating_contribution": as_one(counters["total_supply"]),
                "deducted_from_accounts": as_one(counters["total_deducted_amount"]),
                "credited_to_accounts": as_one(counters["total_credited_amount"]),
            },
        },
        "counts": {
            "blocks": counters["block_count"],
            "contract_creations": counters["contract_creations"],
            "transaction": counters["transaction_count"],
        },
        "max": {
            "transaction_per_block": {
                "count": max_tx_per_block,
                "identifier": max_tx_per_block_id,
            },
            "accounts_per_block": {
                "count": max_accs_per_block,
                "identifier": max_accs_per_block_id,
            },
            "seen_epoch": max_epoch,
            "seen_block": max_block,
        },
        "min": {
            "transaction_per_block": {
                "count": min_tx_per_block,
                "identifier": min_tx_per_block_id,  # BUG FIX: was max_tx_per_block_id
            },
            "accounts_per_block": {
                "count": min_accs_per_block,
                "identifier": min_accs_per_block_id,  # BUG FIX: was max_accs_per_block_id
            },
            "seen_epoch": min_epoch,
            "seen_block": min_block,
        }
    }
def get_high_level_stats():
    """Print pre-staking, staking, and overall stats for the loaded shard."""
    last_epoch_excl = data["blocks"][-1]["block_epoch"] + 1
    pre_staking_stats = get_stats_by_epoch(data["blocks"], 0, STAKING_EPOCH)
    overall = get_stats_by_epoch(data["blocks"], 0, last_epoch_excl)
    # staking_stats includes the last pre-staking epoch (STAKING_EPOCH-1) for
    # clearer staking stats.
    staking_stats = get_stats_by_epoch(data["blocks"], STAKING_EPOCH - 1, last_epoch_excl)

    def fmt(stats):
        return json.dumps(stats, sort_keys=True, indent=4)

    clear_output(wait=True)
    print("Pre-staking stats: " + fmt(pre_staking_stats))
    print()
    print("Staking stats: " + fmt(staking_stats))
    print()
    print("Overall stats: " + fmt(overall))
get_high_level_stats()  # Print the three summary reports for the loaded shard.
Pre-staking stats: {
"counts": {
"blocks": 3326154,
"contract_creations": 2,
"transaction": 9514
},
"funds": {
"block_rewards": 79827648.23309702,
"coinbase_rewards": 0.23309700000000003,
"cross_shard_received": 62331.79677812801,
"cross_shard_sent": 13957924.71274405,
"gas": 0.23309700000000003,
"supply": {
"circulating_contribution": 65932055.084034085,
"credited_to_accounts": 137171171.35525537,
"deducted_from_accounts": 71239116.27122128
}
},
"max": {
"accounts_per_block": {
"count": 243,
"identifier": {
"hash": "0x5cd5e6c6ed033864cf7908e061e635e6cd15d4f951bdd7ba0a7d24b0311bfc70",
"index": 700814
}
},
"seen_block": 3326153,
"seen_epoch": 183,
"transaction_per_block": {
"count": 2,
"identifier": {
"hash": "0x58caba6021f3d7e7c65af196130cf57cd9e5247192672075a444821e233919ad",
"index": 789269
}
}
},
"min": {
"accounts_per_block": {
"count": 0,
"identifier": {
"hash": "0x5cd5e6c6ed033864cf7908e061e635e6cd15d4f951bdd7ba0a7d24b0311bfc70",
"index": 700814
}
},
"seen_block": 0,
"seen_epoch": 0,
"transaction_per_block": {
"count": 0,
"identifier": {
"hash": "0x58caba6021f3d7e7c65af196130cf57cd9e5247192672075a444821e233919ad",
"index": 789269
}
}
}
}
Staking stats: {
"counts": {
"blocks": 2040562,
"contract_creations": 0,
"transaction": 422
},
"funds": {
"block_rewards": 0.0,
"coinbase_rewards": 0.0,
"cross_shard_received": 2048119.2144413688,
"cross_shard_sent": 64144029.00343073,
"gas": 0.015261750003045001,
"supply": {
"circulating_contribution": -62095909.804251105,
"credited_to_accounts": 2118441.2115413686,
"deducted_from_accounts": 64214351.015792474
}
},
"max": {
"accounts_per_block": {
"count": 4,
"identifier": {
"hash": "0xd3d15f2360122ba11c856ea223c61b08b1d3bca53a63954e94d2e42a2c481048",
"index": 3638056
}
},
"seen_block": 5366715,
"seen_epoch": 304,
"transaction_per_block": {
"count": 2,
"identifier": {
"hash": "0x595c5af450691fdb0d990ecd103f9a9291506c8ddb949c23d5e6234f87fa5d6a",
"index": 3332307
}
}
},
"min": {
"accounts_per_block": {
"count": 0,
"identifier": {
"hash": "0xd3d15f2360122ba11c856ea223c61b08b1d3bca53a63954e94d2e42a2c481048",
"index": 3638056
}
},
"seen_block": 3326154,
"seen_epoch": 186,
"transaction_per_block": {
"count": 0,
"identifier": {
"hash": "0x595c5af450691fdb0d990ecd103f9a9291506c8ddb949c23d5e6234f87fa5d6a",
"index": 3332307
}
}
}
}
Overall stats: {
"counts": {
"blocks": 5366716,
"contract_creations": 2,
"transaction": 9936
},
"funds": {
"block_rewards": 79827648.23309702,
"coinbase_rewards": 0.23309700000000003,
"cross_shard_received": 2110451.011219497,
"cross_shard_sent": 78101953.71617478,
"gas": 0.248358750003045,
"supply": {
"circulating_contribution": 3836145.279782976,
"credited_to_accounts": 139289612.56679672,
"deducted_from_accounts": 135453467.28701374
}
},
"max": {
"accounts_per_block": {
"count": 243,
"identifier": {
"hash": "0x5cd5e6c6ed033864cf7908e061e635e6cd15d4f951bdd7ba0a7d24b0311bfc70",
"index": 700814
}
},
"seen_block": 5366715,
"seen_epoch": 304,
"transaction_per_block": {
"count": 2,
"identifier": {
"hash": "0x58caba6021f3d7e7c65af196130cf57cd9e5247192672075a444821e233919ad",
"index": 789269
}
}
},
"min": {
"accounts_per_block": {
"count": 0,
"identifier": {
"hash": "0x5cd5e6c6ed033864cf7908e061e635e6cd15d4f951bdd7ba0a7d24b0311bfc70",
"index": 700814
}
},
"seen_block": 0,
"seen_epoch": 0,
"transaction_per_block": {
"count": 0,
"identifier": {
"hash": "0x58caba6021f3d7e7c65af196130cf57cd9e5247192672075a444821e233919ad",
"index": 789269
}
}
}
}
def select_blocks_by_epoch(blocks, starting, ending):
    """
    Selects the blocks whose epoch is in the range
    [starting, ending) — starting inclusive, ending exclusive.
    """
    print(f"selecting block from epoch [{starting}, {ending})")
    return select_blocks(
        blocks, lambda blk: starting <= blk["block_epoch"] < ending
    )
def select_blocks_by_index(blocks, starting, ending, sample=1):
    """
    Selects every `sample`-th block whose index is in the range
    [starting, ending) — starting inclusive, ending exclusive.
    (Original docstring said "epoch"; this filters by block index.)
    """
    print(f"selecting block from index [{starting}, {ending}) every {sample} samples")

    def keep(blk):
        blk_index = get_blk_index(blk)
        return starting <= blk_index < ending and blk_index % sample == 0

    return select_blocks(blocks, keep)
def select_blocks(blocks, inclusion_fn=lambda blk: True):
    """Return (as a list) every block that satisfies the inclusion function."""
    selected = []
    for blk in blocks:
        if inclusion_fn(blk):
            selected.append(blk)
    return selected
def extract_x_y_col(blocks, x_extractor=lambda b: b["block_epoch"], y_extractor=lambda b: b["amount_transferred"]):
    """
    Walk the blocks SEQUENTIALLY and build the x & y columns with the given
    extractor functions.

    A block contributes to a column only when its extractor returns a
    non-None value; the two columns are filtered independently.
    """
    xs, ys = [], []
    for blk in blocks:
        x_val = x_extractor(blk)
        y_val = y_extractor(blk)
        if x_val is not None:
            xs.append(x_val)
        if y_val is not None:
            ys.append(y_val)
    return xs, ys
def get_rewards_scatter_plt(blocks, sample=1):
    """Scatter trace of block rewards (ONE), averaged over each `sample` blocks."""
    xs, ys = [], []
    running_sum = 0
    for blk in blocks:
        running_sum += as_one(blk["block_rewards"])
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one averaged point per sample window
            xs.append(idx)
            ys.append(running_sum / sample)
            running_sum = 0
    return go.Scatter(x=xs, y=ys, mode='lines', name="blk rewards")
def get_circulating_supply_plt(blocks, sample=1):
    """Scatter trace of circulating supply (ONE), averaged over each `sample` blocks."""
    xs, ys = [], []
    running_sum = 0
    for blk in blocks:
        running_sum += as_one(blk["approx_circulating_supply_so_far"])
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one averaged point per sample window
            xs.append(idx)
            ys.append(running_sum / sample)
            running_sum = 0
    return go.Scatter(x=xs, y=ys, mode='lines', name="supply")
def get_amount_transferred_plt(blocks, sample=1):
    """Scatter trace of amount transferred (ONE), summed over each `sample` blocks."""
    xs, ys = [], []
    running_total = 0
    for blk in blocks:
        running_total += as_one(blk["amount_transferred"])
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one summed point per sample window
            xs.append(idx)
            ys.append(running_total)
            running_total = 0
    return go.Scatter(x=xs, y=ys, mode='lines+markers', name="amount txed")
def get_gas_fees_plt(blocks, sample=1):
    """Scatter trace of gas fees (ONE), summed over each `sample` blocks."""
    xs, ys = [], []
    running_total = 0
    for blk in blocks:
        running_total += as_one(blk["gas_fees"])
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one summed point per sample window
            xs.append(idx)
            ys.append(running_total)
            running_total = 0
    return go.Scatter(x=xs, y=ys, mode='lines+markers', name="gas fees")
def get_cx_in_plt(blocks, sample=1):
    """Scatter trace of cross-shard funds received (ONE), summed over each `sample` blocks."""
    xs, ys = [], []
    running_total = 0
    for blk in blocks:
        running_total += as_one(blk["cross_shard_amount_received"])
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one summed point per sample window
            xs.append(idx)
            ys.append(running_total)
            running_total = 0
    return go.Scatter(x=xs, y=ys, mode='lines+markers', name="cx amount in")
def get_cx_out_plt(blocks, sample=1):
    """Scatter trace of cross-shard funds sent (ONE), summed over each `sample` blocks."""
    xs, ys = [], []
    running_total = 0
    for blk in blocks:
        running_total += as_one(blk["cross_shard_amount_sent"])
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one summed point per sample window
            xs.append(idx)
            ys.append(running_total)
            running_total = 0
    return go.Scatter(x=xs, y=ys, mode='lines+markers', name="cx amount out")
# HTML export path for the funds/token-progression graph.
funds_graph_export_path = f"{GRAPH_DIR}/harmony_token_prog_s{SHARD}.html"
def visualize_funds(blocks, sample=1):
    """Build, export (to funds_graph_export_path), and show the token-progression graph."""
    figure = go.Figure()
    trace_builders = (
        ("rewards", get_rewards_scatter_plt),
        ("supply", get_circulating_supply_plt),
        ("amount transferred", get_amount_transferred_plt),
        ("gas fees", get_gas_fees_plt),
        ("cx in", get_cx_in_plt),
        ("cx out", get_cx_out_plt),
    )
    for label, builder in trace_builders:
        print(f"calculating {label}...")
        figure.add_trace(builder(blocks, sample))
    figure.update_layout(
        title=f"ONE Token Progression (shard {SHARD})",
        hovermode="x unified",
        yaxis_title="ONE Tokens",
        xaxis_title="Block Index",
    )
    clear_output(wait=True)
    figure.write_html(funds_graph_export_path)
    print(f"exported graph to: {funds_graph_export_path}")
    figure.show()
def get_unique_accounts_seen_plt(blocks, sample=1):
    """Scatter trace of the running unique-account count, sampled every `sample` blocks."""
    xs, ys = [], []
    for blk in blocks:
        idx = get_blk_index(blk)
        if idx % sample == 0:  # keep only one point per sample window
            xs.append(idx)
            ys.append(blk["approx_number_of_unique_accounts_so_far"])
    return go.Scatter(x=xs, y=ys, mode='lines', name="unique accs")
def get_accs_per_block_plt(blocks, sample=1):
    """Scatter trace of accounts touched per block, averaged over each `sample` blocks."""
    xs, ys = [], []
    running_sum = 0
    for blk in blocks:
        running_sum += blk["number_of_accounts"]
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one averaged point per sample window
            xs.append(idx)
            ys.append(running_sum / sample)
            running_sum = 0
    return go.Scatter(x=xs, y=ys, mode='lines+markers', name="acc/block avg")
# HTML export path for the account-progression graph.
account_graph_export_path = f"{GRAPH_DIR}/harmony_account_prog_s{SHARD}.html"
def visualize_accounts(blocks, sample=1):
    """Build, export (to account_graph_export_path), and show the account-progression graph."""
    figure = go.Figure()
    trace_builders = (
        ("unique accounts", get_unique_accounts_seen_plt),
        ("accounts per block", get_accs_per_block_plt),
    )
    for label, builder in trace_builders:
        print(f"calculating {label}...")
        figure.add_trace(builder(blocks, sample))
    figure.update_layout(
        title=f"Account Progression (shard {SHARD})",
        hovermode="x unified",
        yaxis_title="Accounts",
        xaxis_title="Block Index",
    )
    clear_output(wait=True)
    figure.write_html(account_graph_export_path)
    print(f"exported graph to: {account_graph_export_path}")
    figure.show()
def get_transactions_plt(blocks, sample=1):
    """Scatter trace of transactions per block, averaged over each `sample` blocks.

    Pre-staking blocks subtract the one side-effect tx each block carries.
    """
    xs, ys = [], []
    running_sum = 0
    for blk in blocks:
        tx_count = blk["number_of_transactions"]
        if blk["block_epoch"] < STAKING_EPOCH:
            tx_count -= 1  # exclude the per-block side-effect tx
        running_sum += tx_count
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one averaged point per sample window
            xs.append(idx)
            ys.append(running_sum / sample)
            running_sum = 0
    return go.Scatter(x=xs, y=ys, mode='lines', name="transactions")
# HTML export path for the transaction-progression graph.
# BUG FIX: this previously used "harmony_contract_prog_s{SHARD}.html", colliding
# with contract_graph_export_path — the notebook output shows both graphs were
# written to the same file, so the transaction graph got overwritten.
transactions_graph_export_path = f"{GRAPH_DIR}/harmony_transaction_prog_s{SHARD}.html"
def visualize_transactions(blocks, sample=1):
    """Build, export (to transactions_graph_export_path), and show the transaction graph."""
    print("calculating transactions...")
    figure = go.Figure(data=[get_transactions_plt(blocks, sample)])
    figure.update_layout(
        title=f"Transaction Progression (shard {SHARD})",
        hovermode="x unified",
        yaxis_title="Count",
        xaxis_title="Block Index",
    )
    clear_output(wait=True)
    figure.write_html(transactions_graph_export_path)
    print(f"exported graph to: {transactions_graph_export_path}")
    figure.show()
def get_contract_creation_plt(blocks, sample=1):
    """Scatter trace of contract creations, summed over each `sample` blocks."""
    xs, ys = [], []
    running_total = 0
    for blk in blocks:
        running_total += blk["number_of_contracts_created"]
        idx = get_blk_index(blk)
        if idx % sample == 0:  # emit one summed point per sample window
            xs.append(idx)
            ys.append(running_total)
            running_total = 0
    return go.Scatter(x=xs, y=ys, mode='lines+markers', name="contract creation")
# HTML export path for the contract-progression graph.
contract_graph_export_path = f"{GRAPH_DIR}/harmony_contract_prog_s{SHARD}.html"
def visualize_contracts(blocks, sample=1):
    """Build, export (to contract_graph_export_path), and show the contract graph."""
    print("calculating contract creations...")
    figure = go.Figure(data=[get_contract_creation_plt(blocks, sample)])
    figure.update_layout(
        title=f"Contract Progression (shard {SHARD})",
        hovermode="x unified",
        yaxis_title="Count",
        xaxis_title="Block Index",
    )
    clear_output(wait=True)
    figure.write_html(contract_graph_export_path)
    print(f"exported graph to: {contract_graph_export_path}")
    figure.show()
# Each plotted point aggregates this many consecutive blocks.
SAMPLE=1000
visualize_funds(data["blocks"], SAMPLE)
exported graph to: ./graphs/harmony_token_prog_s2.html
visualize_accounts(data["blocks"], SAMPLE)  # render & export the account graph
exported graph to: ./graphs/harmony_account_prog_s2.html
visualize_transactions(data["blocks"], SAMPLE)  # render & export the transaction graph
exported graph to: ./graphs/harmony_contract_prog_s2.html
visualize_contracts(data["blocks"], SAMPLE)  # render & export the contract graph
exported graph to: ./graphs/harmony_contract_prog_s2.html
Each data point is an average or sum (depending on data) over the SAMPLE number of blocks before (and including) the indicated block index.
def export_by_epoch(blocks, directory, starting, ending):
    """
    Exports the Blocks data to a CSV file in the given Directory,
    keeping blocks whose epoch is in [starting, ending).
    """
    print(f"exporting from epoch [{starting}, {ending})")
    export(blocks, directory, lambda blk: starting <= blk["block_epoch"] < ending)
def export_by_block(blocks, directory, starting, ending):
    """
    Exports the Blocks data to a CSV file in the given Directory,
    keeping blocks whose index is in [starting, ending).
    """
    print(f"exporting from block [{starting}, {ending})")
    export(
        blocks,
        directory,
        lambda blk: starting <= blk["block_identifier"]["index"] < ending,
    )
def export(blocks, directory, inclusion_fn=lambda blk: True):
    """
    Exports all of the given Blocks that pass the given inclusion function
    to `<directory>/blocks_export_s<SHARD>.csv` (directory must exist).
    """
    assert os.path.isdir(directory)
    save_path = f"{directory}/blocks_export_s{SHARD}.csv"
    print(f"exporting to: {save_path}")
    header = ("index, hash, epoch, rewards, total_deductions, total_credits, "
              "contracts_deployed, tx_count, acc_count, txed_amount, cx_in, cx_out\n")
    # Block-dict keys for every column after index & hash, in header order.
    row_keys = (
        "block_epoch", "block_rewards", "total_amount_deducted_from_accounts",
        "total_amount_credited_from_accounts", "number_of_contracts_created",
        "number_of_transactions", "number_of_accounts", "amount_transferred",
        "cross_shard_amount_received", "cross_shard_amount_sent",
    )
    with open(save_path, 'w') as f:
        f.write(header)
        for blk in blocks:
            if not inclusion_fn(blk):
                continue
            fields = [str(blk["block_identifier"]["index"]), blk["block_identifier"]["hash"]]
            fields.extend(str(blk[key]) for key in row_keys)
            f.write(", ".join(fields) + "\n")
    clear_output(wait=True)
    print(f"finished exporting to {save_path}")
CSV_EXPORT_DIRECTORY = "/tmp"  # Must already exist — export() asserts this.
export_by_epoch(data["blocks"], CSV_EXPORT_DIRECTORY, 0, data["blocks"][-1]["block_epoch"]+1)
# export_by_block(data["blocks"], CSV_EXPORT_DIRECTORY, 0, 1000000)
finished exporting to /tmp/blocks_export_s2.csv